Aside from the backend database CRUD and the creation of the file from a memory stream, I post two parts of the solution here: the first shows how I solved this with plain JavaScript and the jQuery library, and the second covers the web.config settings I needed to adjust.
The cshtml page:
// Large File Upload
// Legacy BlobBuilder shim (deprecated API). NOTE(review): BlobBuilder is never
// used in this script — Blob.slice() does the chunking — confirm before removing.
window.BlobBuilder = window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder;
const BYTES_PER_CHUNK = 1048576*1; // 1MB chunk sizes.
// Shared mutable upload state, read and written across sendRequest/StartXHR
// and the XHR callbacks below:
var requestArr = []; // Request Waiting For Sending
var reqDataArr = []; // Request Data Waiting For Upload
var uploading = []; // Uploading Files
var Upload_Wait_InLine_Arr = []; // Array Waiting For Upload
var uploadOK = []; // Array of Upload Finished
// Fired when a file is chosen in an upload input: shows its name and a
// human-readable size next to the input.
function fileSelected(id) {
    // A non-empty size label means a previous file is still staged in this
    // slot; force the user to clear it before staging a new one.
    if ($("#file_size" + id).text()) {
        $("#fileToUpload" + id).val('');
        alert('Please clear previous file before upload.');
        return;
    }
    const selected = document.getElementById('fileToUpload' + id).files[0];
    if (!selected) {
        return;
    }
    // Render the size in MB above 1 MiB, otherwise in KB (2 decimal places).
    const overMeg = selected.size > 1024 * 1024;
    const divisor = overMeg ? 1024 * 1024 : 1024;
    const label = (Math.round(selected.size * 100 / divisor) / 100).toString() + (overMeg ? 'MB' : 'KB');
    $("#fileName" + id).html(selected.name);
    $("#file_size" + id).html(label);
}
// Dequeues the next waiting file, slices it into 1MB chunks, wraps each chunk
// in a FormData via uploadFile(), then starts the XHR pump.
function sendRequest() {
    // Begin each file with fresh chunk / in-flight queues.
    reqDataArr = [];
    uploading = [];
    const nextID = Upload_Wait_InLine_Arr[0];
    const blob = document.getElementById('fileToUpload' + nextID).files[0];
    const FileNo = $('#file_no' + nextID).text();
    const FileName = blob.name;
    //----------total_file_size----------
    // Human-readable total size: MB above 1 MiB, otherwise KB.
    const overMeg = blob.size > 1024 * 1024;
    const unit = overMeg ? 1024 * 1024 : 1024;
    const FileSize = (Math.round(blob.size * 100 / unit) / 100).toString() + (overMeg ? 'MB' : 'KB');
    //----------total_file_size----------
    //file_status
    $("#status" + nextID).html("Uploading");
    const SIZE = blob.size;
    sessionStorage.setItem("totalSize" + nextID, SIZE.toString());
    const totalPart = Math.ceil(SIZE / BYTES_PER_CHUNK);
    // Queue every chunk as its own multipart payload.
    let part = 1;
    for (let start = 0; start < SIZE; start += BYTES_PER_CHUNK) {
        uploadFile(blob.slice(start, start + BYTES_PER_CHUNK), part, totalPart, nextID, FileName, FileSize, FileNo);
        part++;
    }
    StartXHR();
}
// Wraps one file chunk plus its metadata into a FormData and appends it to
// the pending-upload queue (reqDataArr). Values are coerced to strings by
// FormData.append as usual.
function uploadFile(blobFile, part, totalPart, id, FileName, FileSize, FileNo) {
    const fd = new FormData();
    const fields = {
        fileToUpload: blobFile,
        part: part,
        totalPart: totalPart,
        id: id,
        FileName: FileName,
        FileSize: FileSize,
        FileNo: FileNo,
    };
    for (const key in fields) {
        fd.append(key, fields[key]);
    }
    reqDataArr.push(fd);
}
function StartXHR() {
// reverse then pop, like shift(), doing a FIFO (Queue)
reqDataArr.reverse();
requestArr = [];
//Set How Many XHR Sent By One Time
let XHRSentNum = 1;
for (let i = 0; i < XHRSentNum; i++) {
let xhr = new XMLHttpRequest();
//xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("loadstart", loadstart, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("abort", uploadCanceled, false);
xhr.addEventListener("timeout", timeout, false);
xhr.addEventListener("loadend", loadend, false);
xhr.tryTime = 0;
xhr.SendFD = function(){
let data = reqDataArr.pop();
uploading.push(data);
if(data){
xhr.open("POST", "@Url.Action("Upload","File")");
xhr.data = data;
xhr.tryTime++;
xhr.send(data);
}
}
requestArr.push(xhr);
}
//Sent Form Data
for(let i = 0; i < XHRSentNum; i++){
requestArr[i].SendFD();
}
}
// CallBacks
// Fires after every chunk request settles (success, error, or abort):
// advances the per-file byte counter kept in sessionStorage, refreshes the
// progress label, then asks the server to confirm the chunk arrived.
function loadend(evt) {
    // Queue empty: this file (and possibly the whole batch) is done; abort
    // any residue on this worker and stop.
    if (Upload_Wait_InLine_Arr.length == 0) {
        this.abort();
        return;
    }
    let nextID = Upload_Wait_InLine_Arr[0];
    // BUGFIX: advance the uploaded-bytes counter BEFORE computing the
    // percentage — the original computed from the stale value, so the bar
    // lagged one chunk behind. Always pass a radix to parseInt.
    let current = parseInt(sessionStorage.getItem("current" + nextID), 10) + BYTES_PER_CHUNK;
    let totalSize = parseInt(sessionStorage.getItem("totalSize" + nextID), 10);
    sessionStorage.setItem("current" + nextID, current.toString());
    // Clamp to 100: the last chunk usually overshoots totalSize. This also
    // covers the small-file (< one chunk) case the original special-cased.
    let percentage = Math.min(100, Math.round(current / totalSize * 100));
    document.getElementById('progressNumber' + nextID).innerHTML = percentage.toString() + '%';
    CheckFileUploadSuccess(this);
}
// Remaining XHR lifecycle hooks wired up in StartXHR. All are intentional
// no-ops today (only "loadend" above drives the pipeline); kept as named
// functions so per-event logging/UI handling can be added later.
function timeout(evt) {
}
function loadstart(evt) {
}
function uploadComplete(evt) {
}
function uploadFailed(evt) {
}
function uploadCanceled(evt) {
}
// Data Form Object
function CheckFileUploadSuccess(fd_obj) {
let upload_success = false;
// Check FIle Upload Is Success Or Not
$.ajax({
url: "@Url.Action("CheckFileExist", "File")",
type: "POST",
data: { "id": fd_obj.data.get("id"), "part": fd_obj.data.get("part"), "FileNo": fd_obj.data.get("FileNo") },
async: true,
success: function (is_success) {
upload_success = is_success;
}
}).done(function () {
if (upload_success) {
// If The File Part Is The Same
let deleteIndex = uploading.indexOf(p=>p.get("part") === fd_obj.data.get("part"));
if (deleteIndex > -1)
// Part Out of the Uploading Part Queue
uploading.splice(deleteIndex, 1);
// If this File Got Other Parts Left For Uploding, then SendFD again.
if (reqDataArr.length > 0){
fd_obj.tryTime = 0;
fd_obj.SendFD();
}
else {
let index = requestArr.indexOf(fd_obj);
// If this File is Finished the Upload, Then Out Of the Request Queue
if (index > -1)
requestArr.splice(index, 1);
else
console('continue.')
let nextID = Upload_Wait_InLine_Arr[0];
let totalSize = parseInt(sessionStorage.getItem("totalSize" + nextID));
let is_small_file = (totalSize < BYTES_PER_CHUNK);
if (requestArr.length === 0 || (is_small_file)) {
// If All Upload Request Is Done, Then Start Compress
$("#status" + nextID).html("Analizing.");
$("#progressNumber" + nextID).html('100%')
let needUnCompress = CheckUnCompress(fd_obj.data.get("id"));
if (needUnCompress) {
console.log('Need Compress.')
$("#status" + nextID).html("Compressing.");
}
// push this ID into Upload Finished Array, Then Take It Out From The Queue
uploadOK.push(nextID);
Upload_Wait_InLine_Arr.splice(0, 1);
console.log("Queue Left", Upload_Wait_InLine_Arr)
if (Upload_Wait_InLine_Arr.length > 0) {
// Continue the Next Upload File Request
sendRequest();
console.log('Continue Upload The Next File.')
}
else {
console.log("All File Have Uploaded Successfully --> Compress", uploadOK);
//Merge Uplaod Parts, And Unzip The Compress ZIP File
merge_unzip();
}
}
}
}
// If Upload Failed
else {
console.log("File Not Found", fd_obj.data.get("part"));
// Try Three Time
if (fd_obj.tryTime < 3) {
current -= BYTES_PER_CHUNK;
fd_obj.open("POST", "@Url.Action("Upload","File")");
fd_obj.send(fd_obj.data);
}
else {
fd_obj.SendFD();
}
}
})
}
// Check If This File Need Uncompress
function CheckUnCompress(file_id) {
let result = false;
$.ajax({
url: "@Url.Action("CheckUnCompress", "File")",
type: "POST",
data: { "id": file_id },
async: false,
success: function (data) {
result = data;
}
});
return result;
}
function merge_unzip() {
let new_merge_id = uploadOK[0];
let file_id = new_merge_id;
let fileNo = $("#file_no" + file_id).text();
let needUnZip = ($("#status" + file_id).text() === 'Uncompressing.');
console.log(" file_id :", file_id, " fileNo :", fileNo, " needUnZip :", needUnZip)
$.ajax({
url: "@Url.Action("Merge_Unzip", "File")",
type:"POST",
data: { "file_id": file_id, "fileNo": fileNo,"needUnZip":needUnZip },
success: function (statusCode) {
// JS slice() Is Shallow Copy, The Original Array Doesn't Change.
uploadOK = uploadOK.slice(1);
if (statusCode == 201) {
$("#status" + new_merge_id).html("Upload Successfully");
}
if (statusCode == 200) {
$('#status' + new_merge_id).html("Uncompress Successfully")
}
// If Upload Finished Array Length Greater Than 0, Need To Do Merge And Uncompress Again.
if (uploadOK.length > 0) {
merge_unzip();
}
// All Uploads Completed
if (uploadOK.length == 0) {
toastr.success("Action Successfully", 'Success:');
clearAllFiles();
$("#alert_phrase").text('Upload Successfully!')
$("#alert_phrase").css('color', '#0ac282');
}
}
})
}
// Clear All Upload Input
// Resets every upload input and wipes the name/size/progress labels.
function clearAllFiles() {
    $("input[name=fileToUpload]").val('');
    ["uploadFileName", "uploadFileSize", "uploadProgressNum"].forEach(function (divName) {
        $("div[name=" + divName + "]").empty();
    });
}
function clearFiles(file_id) {
if (!confirm("Are you sure, you want to clear selected file?")) {
return false;
}
// Clear Selected File
let upload_obj = document.getElementById('fileToUpload' + file_id);
upload_obj.outerHTML = upload_obj.outerHTML;
$("#status" + file_id).html("Empty");
$("#file_size" + file_id).html("");
$("#fileName" + file_id).html("");
$("#progressNumber" + file_id).html("");
$.ajax({
url:"@Url.Action("ClearFile","File")",
type: "POST",
data: { "file_id": file_id },
success: function () {
toastr.success("File clear successfully.", 'Success:');
}
})
}
// Run All Upload Action On The Page
// Collects every input with a file staged, seeds per-file progress counters
// in sessionStorage, and starts the upload pipeline.
function run_upload_list() {
    Upload_Wait_InLine_Arr = [];
    $("input[type='file']").each(function (index) {
        const staged = $(this).val();
        const data_id = $(this).attr('data-id');
        if (!isEmpty(staged)) {
            console.log('start upload' + index + ' : ' + data_id);
            Upload_Wait_InLine_Arr.push(data_id);
        }
    });
    // Nothing staged anywhere: tell the user and bail out.
    if (Upload_Wait_InLine_Arr.length === 0) {
        alert('There is no any upload files at inputs of the page.');
        return;
    }
    // Warn the user not to navigate away mid-upload.
    $("#alert_phrase").text('To prevent file lost, please do not operate other actions.');
    $("#alert_phrase").css('color', 'red');
    console.log(Upload_Wait_InLine_Arr);
    // Zero each queued file's progress counters before uploading starts.
    Upload_Wait_InLine_Arr.forEach(function (queuedID) {
        sessionStorage.setItem("current" + queuedID, "0");
        sessionStorage.setItem("totalSize" + queuedID, "0");
    });
    // Set All Requests To the Init.
    requestArr = [];
    reqDataArr = [];
    sendRequest();
}
In web.config, pay attention to these two sections.
<system.web>
<httpRuntime targetFramework="4.5.2" maxRequestLength="2147483647" executionTimeout="1600" maxQueryStringLength="10240" requestLengthDiskThreshold="102400"/>
</system.web>
<system.webServer>
<security>
<requestFiltering>
<requestLimits maxAllowedContentLength="2147483647" maxQueryString="32768" />
</requestFiltering>
</security>
</system.webServer>